import tensorflow as tf
from tensorflow.keras import layers, models,preprocessing,regularizers,callbacks
from matplotlib import pyplot as plt
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Conv2D,MaxPool2D,Dropout,Flatten,Dense,BatchNormalization
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.preprocessing import image
from tensorflow.keras.callbacks import EarlyStopping
import numpy as np
# Sanity-check that one known test image loads correctly before building the pipeline.
sample_image = image.load_img("E:/Depression_project/Balanced data/test/nondepressed/1P307_no_silence.jpeg")
print(sample_image)
# Keep the array and report its shape (the bare np.array(...) was previously discarded).
sample_array = np.array(sample_image)
print("Sample image array shape:", sample_array.shape)
# Pixel values are rescaled from [0, 255] to [0, 1]; no augmentation is applied.
datagen = preprocessing.image.ImageDataGenerator(rescale=1 / 255)
ts = (512, 512)  # all spectrogram images are resized to 512x512

# All three generators use identical preprocessing settings (batch_size and
# color_mode were previously inconsistent between the splits).
train_generator = datagen.flow_from_directory(
    'E:/Depression_project/Balanced data/train',
    batch_size=10,
    color_mode='rgb',
    target_size=ts,
    class_mode='binary')
val_generator = datagen.flow_from_directory(
    'E:/Depression_project/Balanced data/val',
    batch_size=10,
    color_mode='rgb',
    target_size=ts,
    class_mode='binary')
# shuffle=False keeps test_generator.classes aligned with the order in which
# predictions are produced — the confusion matrix / classification report code
# later in this file relies on that alignment.
test_generator = datagen.flow_from_directory(
    'E:/Depression_project/Balanced data/test',
    batch_size=10,
    color_mode='rgb',
    target_size=ts,
    shuffle=False,
    class_mode='binary')
# Model: a small CNN with two conv/pool stages followed by a regularized
# dense head and a single sigmoid output for binary classification.
input_shape = (512, 512, 3)
model = models.Sequential([
    layers.Conv2D(32, (3, 3), activation='relu', input_shape=input_shape),
    layers.MaxPooling2D((2, 2), strides=2),
    layers.Conv2D(128, (3, 3), activation='relu',
                  kernel_regularizer=regularizers.l2(0.01)),
    layers.MaxPooling2D(2, 2),
    layers.Flatten(),
    layers.Dense(128, activation='relu',
                 kernel_regularizer=regularizers.l2(0.01)),
    layers.Dropout(0.5),
    layers.Dense(32, activation='relu',
                 kernel_regularizer=regularizers.l2(0.0001)),
    layers.Dropout(0.5),
    # One sigmoid unit: output is interpreted as the probability of class 1.
    layers.Dense(1, activation='sigmoid'),
])
# Track the full confusion-matrix counts alongside accuracy during training.
model.compile(
    optimizer=tf.keras.optimizers.SGD(),
    loss=tf.keras.losses.BinaryCrossentropy(),
    metrics=[
        'accuracy',
        tf.keras.metrics.TrueNegatives(),
        tf.keras.metrics.TruePositives(),
        tf.keras.metrics.FalseNegatives(),
        tf.keras.metrics.FalsePositives(),
    ],
)
model.summary()
# Fitting with early stopping: stop once validation accuracy has not improved
# for 5 consecutive epochs, and restore the best weights seen so far.
# - fit_generator is deprecated/removed in TF2; model.fit accepts generators.
# - The monitored key must be 'val_accuracy': 'val_acc' is not produced by TF2,
#   which silently made EarlyStopping a no-op.
# - shuffle= has no effect when training from a generator, so it is dropped.
history = model.fit(
    train_generator,
    epochs=10,
    validation_data=val_generator,
    callbacks=[callbacks.EarlyStopping(monitor='val_accuracy',
                                       patience=5,
                                       restore_best_weights=True)])
# Plot the training curves recorded by fit(): one figure for loss and one for
# accuracy, each showing the train and validation series per epoch.
for metric, title, ylabel in (
        ('loss', 'Loss Graph', 'Loss'),
        ('accuracy', 'Accuracy Graph', 'Accuracy')):
    plt.plot(history.history[metric])
    plt.plot(history.history['val_' + metric])
    plt.title(title)
    plt.ylabel(ylabel)
    plt.xlabel('Epoch')
    plt.legend(['train', 'val'], loc='upper left')
    plt.show()
# Evaluate on the held-out test set. The unpacking order matches the metric
# order given to model.compile: loss, accuracy, TN, TP, FN, FP.
# evaluate_generator is deprecated/removed in TF2; model.evaluate handles generators.
loss, accuracy, tn, tp, fn, fp = model.evaluate(test_generator)
print("Accuracy:", accuracy)
print("True Negative:", tn)
print("True Positive:", tp)
print("False Negative:", fn)
print("False Positive:", fp)
# Print the folder-name -> integer-label mapping (the bare expression was a
# no-op outside a notebook).
print(train_generator.class_indices)
import os
from IPython.display import Image, display

# Walk every image in the test folders, display it, and print the model's
# per-image verdict.
DATADIR = "E:/Depression_project/Balanced data/test"
CATEGORIES = ["depressed", "nondepressed"]
for category in CATEGORIES:
    path = os.path.join(DATADIR, category)
    for img in os.listdir(path):
        img_path = os.path.join(path, img)
        display(Image(filename=img_path, width=200, height=200))
        test_image = image.load_img(img_path, target_size=ts)
        test_image = image.img_to_array(test_image)
        # The model expects a batch axis: (1, height, width, channels).
        test_image = np.expand_dims(test_image, axis=0)
        # Rescale to match the training pipeline (ImageDataGenerator used
        # rescale=1/255); feeding raw [0, 255] pixels saturates the network.
        test_image = test_image / 255.0
        result = model.predict(test_image)
        print("Path of the Spectrogram image:", img_path)
        # The sigmoid output is a probability in (0, 1); threshold it at 0.5
        # instead of testing exact equality with 1, which is almost never true
        # for a float.
        if result[0][0] >= 0.5:
            prediction = 'This Spectrogram image is of Non-depressed category'
        else:
            prediction = 'This Spectrogram image is of Depressed category'
        print(prediction)
from sklearn.metrics import classification_report, confusion_matrix

# predict_generator is deprecated/removed in TF2; model.predict handles generators.
pred = model.predict(test_generator)
# The model has a single sigmoid output, so pred has shape (n, 1).
# np.argmax(pred, axis=1) over one column is always 0, which previously forced
# every prediction to class 0 — threshold the probability at 0.5 instead.
pred = (pred.ravel() >= 0.5).astype(int)
# NOTE(review): this comparison is only valid if test_generator was created
# with shuffle=False so .classes matches the prediction order — confirm.
print('Below is the Confusion Matrix for our model on test data:')
print(confusion_matrix(test_generator.classes, pred))
print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
print('Below is the Classification Report for our model on test data:')
print(classification_report(test_generator.classes, pred, target_names=CATEGORIES))
# Build a generator over a second training directory and collect the raw
# sigmoid scores for both that set and the test set.
tr_generator = datagen.flow_from_directory(
    'E:/Depression_project/Divided_data_copy/train',
    target_size=ts,
    color_mode='rgb',
    class_mode='binary')

train_predictions_baseline = model.predict(tr_generator)
test_predictions_baseline = model.predict(test_generator)
def plot_cm(labels, predictions, p=0.5):
    """Plot and print the confusion matrix for binary predictions.

    Args:
        labels: ground-truth 0/1 labels.
        predictions: raw sigmoid scores, thresholded against ``p``.
        p: decision threshold (default 0.5).
    """
    cm = confusion_matrix(labels, predictions > p)
    plt.figure(figsize=(5, 5))
    # Render the heatmap with matplotlib directly: `sns` (seaborn) was used
    # here before but never imported, so calling this raised a NameError.
    plt.imshow(cm, interpolation='nearest', cmap='Blues')
    plt.colorbar()
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            plt.text(j, i, format(cm[i][j], 'd'), ha='center', va='center')
    plt.title('Confusion matrix @{:.2f}'.format(p))
    plt.ylabel('Actual label')
    plt.xlabel('Predicted label')
    # The print text previously referred to "fraudulent transactions", copied
    # from a fraud-detection example; reworded for the class-0/class-1 images
    # of this project (presumably depressed=0, nondepressed=1 — confirm
    # against train_generator.class_indices).
    print('Class-0 images correctly detected (True Negatives): ', cm[0][0])
    print('Class-0 images incorrectly flagged (False Positives): ', cm[0][1])
    print('Class-1 images missed (False Negatives): ', cm[1][0])
    print('Class-1 images detected (True Positives): ', cm[1][1])
    print('Total class-1 images: ', np.sum(cm[1]))
# Final evaluation on the test set, printing every compiled metric by name.
baseline_results = model.evaluate(test_generator)
for name, value in zip(model.metrics_names, baseline_results):
    print(name, ': ', value)
print()

# BUG FIX: plot_cm expects (true labels, predicted scores); previously two
# sets of predictions were passed, with train scores standing in as labels.
plot_cm(test_generator.classes, test_predictions_baseline)